Chapter 8.2 - Deep Dream


In [1]:
from keras.applications.inception_v3 import InceptionV3
from keras import backend as K


Using TensorFlow backend.

In [2]:
# We will not be training the model, so we disable all training-specific operations
K.set_learning_phase(0)

In [3]:
model = InceptionV3(weights='imagenet',
                    include_top=False)
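
Only the convolutional base is needed here: Deep Dream maximizes the activations of a few of the mixed* concatenation layers (visible in the summary below) by gradient ascent on the input image itself. The following is a minimal sketch of that setup with the Keras backend API; the particular layers, coefficient values, and the fetch_loss_and_grads name are illustrative assumptions rather than values prescribed by this notebook:

# Sketch: weighted contributions of a few mixed* layers (illustrative values)
layer_contributions = {
    'mixed2': 0.2,
    'mixed3': 3.0,
    'mixed4': 2.0,
    'mixed5': 1.5,
}

layer_dict = dict([(layer.name, layer) for layer in model.layers])

# The loss is a weighted sum of the L2 norms of the chosen layers' activations,
# normalized by activation size, with a 2-pixel border excluded to limit border artifacts.
loss = K.variable(0.)
for layer_name, coeff in layer_contributions.items():
    activation = layer_dict[layer_name].output
    scaling = K.prod(K.cast(K.shape(activation), 'float32'))
    loss = loss + coeff * K.sum(K.square(activation[:, 2:-2, 2:-2, :])) / scaling

# Gradient ascent operates on the input image; gradients are normalized by their
# mean absolute value to keep step sizes stable.
dream = model.input
grads = K.gradients(loss, dream)[0]
grads /= K.maximum(K.mean(K.abs(grads)), 1e-7)
fetch_loss_and_grads = K.function([dream], [loss, grads])

Calling fetch_loss_and_grads([img]) then returns the current loss value and the gradient with respect to the image, which a gradient-ascent loop can add back to the image step by step.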

In [4]:
model.summary()


__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            (None, None, None, 3) 0                                            
__________________________________________________________________________________________________
...
[summary output truncated: the remaining rows list InceptionV3's conv2d / batch_normalization /
activation layers, grouped into the Inception modules concatenated as mixed0 ... mixed10]
__________________________________________________________________________________________________
batch_normalization_91 (BatchNo (None, None, None, 3 1152        conv2d_91[0][0]                  
__________________________________________________________________________________________________
activation_87 (Activation)      (None, None, None, 3 0           batch_normalization_87[0][0]     
__________________________________________________________________________________________________
activation_91 (Activation)      (None, None, None, 3 0           batch_normalization_91[0][0]     
__________________________________________________________________________________________________
conv2d_88 (Conv2D)              (None, None, None, 3 442368      activation_87[0][0]              
__________________________________________________________________________________________________
conv2d_89 (Conv2D)              (None, None, None, 3 442368      activation_87[0][0]              
__________________________________________________________________________________________________
conv2d_92 (Conv2D)              (None, None, None, 3 442368      activation_91[0][0]              
__________________________________________________________________________________________________
conv2d_93 (Conv2D)              (None, None, None, 3 442368      activation_91[0][0]              
__________________________________________________________________________________________________
average_pooling2d_9 (AveragePoo (None, None, None, 2 0           mixed9[0][0]                     
__________________________________________________________________________________________________
conv2d_86 (Conv2D)              (None, None, None, 3 655360      mixed9[0][0]                     
__________________________________________________________________________________________________
batch_normalization_88 (BatchNo (None, None, None, 3 1152        conv2d_88[0][0]                  
__________________________________________________________________________________________________
batch_normalization_89 (BatchNo (None, None, None, 3 1152        conv2d_89[0][0]                  
__________________________________________________________________________________________________
batch_normalization_92 (BatchNo (None, None, None, 3 1152        conv2d_92[0][0]                  
__________________________________________________________________________________________________
batch_normalization_93 (BatchNo (None, None, None, 3 1152        conv2d_93[0][0]                  
__________________________________________________________________________________________________
conv2d_94 (Conv2D)              (None, None, None, 1 393216      average_pooling2d_9[0][0]        
__________________________________________________________________________________________________
batch_normalization_86 (BatchNo (None, None, None, 3 960         conv2d_86[0][0]                  
__________________________________________________________________________________________________
activation_88 (Activation)      (None, None, None, 3 0           batch_normalization_88[0][0]     
__________________________________________________________________________________________________
activation_89 (Activation)      (None, None, None, 3 0           batch_normalization_89[0][0]     
__________________________________________________________________________________________________
activation_92 (Activation)      (None, None, None, 3 0           batch_normalization_92[0][0]     
__________________________________________________________________________________________________
activation_93 (Activation)      (None, None, None, 3 0           batch_normalization_93[0][0]     
__________________________________________________________________________________________________
batch_normalization_94 (BatchNo (None, None, None, 1 576         conv2d_94[0][0]                  
__________________________________________________________________________________________________
activation_86 (Activation)      (None, None, None, 3 0           batch_normalization_86[0][0]     
__________________________________________________________________________________________________
mixed9_1 (Concatenate)          (None, None, None, 7 0           activation_88[0][0]              
                                                                 activation_89[0][0]              
__________________________________________________________________________________________________
concatenate_2 (Concatenate)     (None, None, None, 7 0           activation_92[0][0]              
                                                                 activation_93[0][0]              
__________________________________________________________________________________________________
activation_94 (Activation)      (None, None, None, 1 0           batch_normalization_94[0][0]     
__________________________________________________________________________________________________
mixed10 (Concatenate)           (None, None, None, 2 0           activation_86[0][0]              
                                                                 mixed9_1[0][0]                   
                                                                 concatenate_2[0][0]              
                                                                 activation_94[0][0]              
==================================================================================================
Total params: 21,802,784
Trainable params: 21,768,352
Non-trainable params: 34,432
__________________________________________________________________________________________________

In [5]:
# Dictionary mapping layer names to coefficients.
# Each coefficient quantifies how much that layer's activation contributes
# to the loss that will be maximized.
# Keys must match layer names in the InceptionV3 model.
layer_contributions = {
    'mixed2' : 0.2,
    'mixed3' : 3.,
    #'mixed4' : 2.,
    'mixed5' : 1.5,
}
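
A quick sanity check (a minimal sketch, not part of the original notebook): list the model's 'mixed' concatenation layers and verify that every key above actually exists in InceptionV3.

# Hypothetical helper cell: verify the chosen layer names against the loaded model
candidate_names = [layer.name for layer in model.layers if layer.name.startswith('mixed')]
print(candidate_names)
assert all(name in candidate_names for name in layer_contributions)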

In [6]:
# Build a dictionary mapping layer names to layer instances,
# so we can look up each "key" layer's output by name.
layer_dict = dict([(layer.name, layer) for layer in model.layers])

In [7]:
layer_dict


Out[7]:
{'activation_1': <keras.layers.core.Activation at 0x2ebc9e8cdd8>,
 'activation_10': <keras.layers.core.Activation at 0x2ec6ff9fc88>,
 'activation_11': <keras.layers.core.Activation at 0x2ec6ff8dbe0>,
 'activation_12': <keras.layers.core.Activation at 0x2ec701a9470>,
 'activation_13': <keras.layers.core.Activation at 0x2ec7029f400>,
 'activation_14': <keras.layers.core.Activation at 0x2ec702e0ef0>,
 'activation_15': <keras.layers.core.Activation at 0x2ec703dafd0>,
 'activation_16': <keras.layers.core.Activation at 0x2ec704a6630>,
 'activation_17': <keras.layers.core.Activation at 0x2ec705b0e80>,
 'activation_18': <keras.layers.core.Activation at 0x2ec706fbe48>,
 'activation_19': <keras.layers.core.Activation at 0x2ec708cfac8>,
 'activation_2': <keras.layers.core.Activation at 0x2ec6425d6a0>,
 'activation_20': <keras.layers.core.Activation at 0x2ec708c01d0>,
 'activation_21': <keras.layers.core.Activation at 0x2ec70abefd0>,
 'activation_22': <keras.layers.core.Activation at 0x2ec70be1c88>,
 'activation_23': <keras.layers.core.Activation at 0x2ec70bccbe0>,
 'activation_24': <keras.layers.core.Activation at 0x2ec70cd7a20>,
 'activation_25': <keras.layers.core.Activation at 0x2ec70e049b0>,
 'activation_26': <keras.layers.core.Activation at 0x2ec71008dd8>,
 'activation_27': <keras.layers.core.Activation at 0x2ec71100630>,
 'activation_28': <keras.layers.core.Activation at 0x2ec711e77b8>,
 'activation_29': <keras.layers.core.Activation at 0x2ec7120ebe0>,
 'activation_3': <keras.layers.core.Activation at 0x2ec6f79efd0>,
 'activation_30': <keras.layers.core.Activation at 0x2ec713fcdd8>,
 'activation_31': <keras.layers.core.Activation at 0x2ec7152bcc0>,
 'activation_32': <keras.layers.core.Activation at 0x2ec7151cb38>,
 'activation_33': <keras.layers.core.Activation at 0x2ec71652dd8>,
 'activation_34': <keras.layers.core.Activation at 0x2ec71815f60>,
 'activation_35': <keras.layers.core.Activation at 0x2ec7183deb8>,
 'activation_36': <keras.layers.core.Activation at 0x2ec71987630>,
 'activation_37': <keras.layers.core.Activation at 0x2ec71a3e668>,
 'activation_38': <keras.layers.core.Activation at 0x2ec71b4af60>,
 'activation_39': <keras.layers.core.Activation at 0x2ec71d3bd68>,
 'activation_4': <keras.layers.core.Activation at 0x2ec6f95fda0>,
 'activation_40': <keras.layers.core.Activation at 0x2ec71e6b9b0>,
 'activation_41': <keras.layers.core.Activation at 0x2ec71f74748>,
 'activation_42': <keras.layers.core.Activation at 0x2ec71f93d30>,
 'activation_43': <keras.layers.core.Activation at 0x2ec720b3eb8>,
 'activation_44': <keras.layers.core.Activation at 0x2ec7217de10>,
 'activation_45': <keras.layers.core.Activation at 0x2ec72295358>,
 'activation_46': <keras.layers.core.Activation at 0x2ec723a59e8>,
 'activation_47': <keras.layers.core.Activation at 0x2ec724d1e10>,
 'activation_48': <keras.layers.core.Activation at 0x2ec72683f60>,
 'activation_49': <keras.layers.core.Activation at 0x2ec726c3b00>,
 'activation_5': <keras.layers.core.Activation at 0x2ec6f993f98>,
 'activation_50': <keras.layers.core.Activation at 0x2ec728a8e80>,
 'activation_51': <keras.layers.core.Activation at 0x2ec729b6c88>,
 'activation_52': <keras.layers.core.Activation at 0x2ec729a2828>,
 'activation_53': <keras.layers.core.Activation at 0x2ec72abfb38>,
 'activation_54': <keras.layers.core.Activation at 0x2ec72bdc9b0>,
 'activation_55': <keras.layers.core.Activation at 0x2ec72d09f98>,
 'activation_56': <keras.layers.core.Activation at 0x2ec72e15da0>,
 'activation_57': <keras.layers.core.Activation at 0x2ec72ef1f98>,
 'activation_58': <keras.layers.core.Activation at 0x2ec72ffbac8>,
 'activation_59': <keras.layers.core.Activation at 0x2ec730e4fd0>,
 'activation_6': <keras.layers.core.Activation at 0x2ec6fb94f98>,
 'activation_60': <keras.layers.core.Activation at 0x2ec732f8b70>,
 'activation_61': <keras.layers.core.Activation at 0x2ec73405c88>,
 'activation_62': <keras.layers.core.Activation at 0x2ec733f7278>,
 'activation_63': <keras.layers.core.Activation at 0x2ec7352dda0>,
 'activation_64': <keras.layers.core.Activation at 0x2ec7364cf28>,
 'activation_65': <keras.layers.core.Activation at 0x2ec73747fd0>,
 'activation_66': <keras.layers.core.Activation at 0x2ec73832f60>,
 'activation_67': <keras.layers.core.Activation at 0x2ec73920630>,
 'activation_68': <keras.layers.core.Activation at 0x2ec73a29f28>,
 'activation_69': <keras.layers.core.Activation at 0x2ec73c1ad30>,
 'activation_7': <keras.layers.core.Activation at 0x2ec6fba4ba8>,
 'activation_70': <keras.layers.core.Activation at 0x2ec73d3d128>,
 'activation_71': <keras.layers.core.Activation at 0x2ec73e56710>,
 'activation_72': <keras.layers.core.Activation at 0x2ec73f54438>,
 'activation_73': <keras.layers.core.Activation at 0x2ec73f90e80>,
 'activation_74': <keras.layers.core.Activation at 0x2ec74058dd8>,
 'activation_75': <keras.layers.core.Activation at 0x2ec74177320>,
 'activation_76': <keras.layers.core.Activation at 0x2ec7424ff60>,
 'activation_77': <keras.layers.core.Activation at 0x2ec74486cc0>,
 'activation_78': <keras.layers.core.Activation at 0x2ec744a3b38>,
 'activation_79': <keras.layers.core.Activation at 0x2ec745c3f98>,
 'activation_8': <keras.layers.core.Activation at 0x2ec6fcd2fd0>,
 'activation_80': <keras.layers.core.Activation at 0x2ec74774dd8>,
 'activation_81': <keras.layers.core.Activation at 0x2ec747a9eb8>,
 'activation_82': <keras.layers.core.Activation at 0x2ec748c0d30>,
 'activation_83': <keras.layers.core.Activation at 0x2ec749e3eb8>,
 'activation_84': <keras.layers.core.Activation at 0x2ec74aaae10>,
 'activation_85': <keras.layers.core.Activation at 0x2ec74ca0b00>,
 'activation_86': <keras.layers.core.Activation at 0x2ec74cf2c88>,
 'activation_87': <keras.layers.core.Activation at 0x2ec74e11f28>,
 'activation_88': <keras.layers.core.Activation at 0x2ec74fbedd8>,
 'activation_89': <keras.layers.core.Activation at 0x2ec750e2e80>,
 'activation_9': <keras.layers.core.Activation at 0x2ec6fe7efd0>,
 'activation_90': <keras.layers.core.Activation at 0x2ec751efc88>,
 'activation_91': <keras.layers.core.Activation at 0x2ec751da438>,
 'activation_92': <keras.layers.core.Activation at 0x2ec752f3b38>,
 'activation_93': <keras.layers.core.Activation at 0x2ec754149b0>,
 'activation_94': <keras.layers.core.Activation at 0x2ec75619978>,
 'average_pooling2d_1': <keras.layers.pooling.AveragePooling2D at 0x2ec70099908>,
 'average_pooling2d_2': <keras.layers.pooling.AveragePooling2D at 0x2ec707c19e8>,
 'average_pooling2d_3': <keras.layers.pooling.AveragePooling2D at 0x2ec70efe2b0>,
 'average_pooling2d_4': <keras.layers.pooling.AveragePooling2D at 0x2ec71d5f8d0>,
 'average_pooling2d_5': <keras.layers.pooling.AveragePooling2D at 0x2ec727b2a58>,
 'average_pooling2d_6': <keras.layers.pooling.AveragePooling2D at 0x2ec731edac8>,
 'average_pooling2d_7': <keras.layers.pooling.AveragePooling2D at 0x2ec73c3e898>,
 'average_pooling2d_8': <keras.layers.pooling.AveragePooling2D at 0x2ec74bd5be0>,
 'average_pooling2d_9': <keras.layers.pooling.AveragePooling2D at 0x2ec7550ce48>,
 'batch_normalization_1': <keras.layers.normalization.BatchNormalization at 0x2ebc9e8c828>,
 'batch_normalization_10': <keras.layers.normalization.BatchNormalization at 0x2ec6fee8358>,
 'batch_normalization_11': <keras.layers.normalization.BatchNormalization at 0x2ec70000710>,
 'batch_normalization_12': <keras.layers.normalization.BatchNormalization at 0x2ec6422fb70>,
 'batch_normalization_13': <keras.layers.normalization.BatchNormalization at 0x2ec701b7780>,
 'batch_normalization_14': <keras.layers.normalization.BatchNormalization at 0x2ec702b0c50>,
 'batch_normalization_15': <keras.layers.normalization.BatchNormalization at 0x2ec703ed4e0>,
 'batch_normalization_16': <keras.layers.normalization.BatchNormalization at 0x2ec704e69e8>,
 'batch_normalization_17': <keras.layers.normalization.BatchNormalization at 0x2ec70598da0>,
 'batch_normalization_18': <keras.layers.normalization.BatchNormalization at 0x2ec706a4cf8>,
 'batch_normalization_19': <keras.layers.normalization.BatchNormalization at 0x2ec707b35f8>,
 'batch_normalization_2': <keras.layers.normalization.BatchNormalization at 0x2ebc9e54780>,
 'batch_normalization_20': <keras.layers.normalization.BatchNormalization at 0x2ec708ed438>,
 'batch_normalization_21': <keras.layers.normalization.BatchNormalization at 0x2ec709c9358>,
 'batch_normalization_22': <keras.layers.normalization.BatchNormalization at 0x2ec70b26358>,
 'batch_normalization_23': <keras.layers.normalization.BatchNormalization at 0x2ec70c41710>,
 'batch_normalization_24': <keras.layers.normalization.BatchNormalization at 0x2ec70d28400>,
 'batch_normalization_25': <keras.layers.normalization.BatchNormalization at 0x2ec70e22940>,
 'batch_normalization_26': <keras.layers.normalization.BatchNormalization at 0x2ec70f9b2b0>,
 'batch_normalization_27': <keras.layers.normalization.BatchNormalization at 0x2ec7101a160>,
 'batch_normalization_28': <keras.layers.normalization.BatchNormalization at 0x2ec71140e48>,
 'batch_normalization_29': <keras.layers.normalization.BatchNormalization at 0x2ec712395f8>,
 'batch_normalization_3': <keras.layers.normalization.BatchNormalization at 0x2ec6f7b04e0>,
 'batch_normalization_30': <keras.layers.normalization.BatchNormalization at 0x2ec71307860>,
 'batch_normalization_31': <keras.layers.normalization.BatchNormalization at 0x2ec714812b0>,
 'batch_normalization_32': <keras.layers.normalization.BatchNormalization at 0x2ec7155bdd8>,
 'batch_normalization_33': <keras.layers.normalization.BatchNormalization at 0x2ec71626630>,
 'batch_normalization_34': <keras.layers.normalization.BatchNormalization at 0x2ec717ccba8>,
 'batch_normalization_35': <keras.layers.normalization.BatchNormalization at 0x2ec718d3ba8>,
 'batch_normalization_36': <keras.layers.normalization.BatchNormalization at 0x2ec719749e8>,
 'batch_normalization_37': <keras.layers.normalization.BatchNormalization at 0x2ec71a2be10>,
 'batch_normalization_38': <keras.layers.normalization.BatchNormalization at 0x2ec71b77588>,
 'batch_normalization_39': <keras.layers.normalization.BatchNormalization at 0x2ec71c457f0>,
 'batch_normalization_4': <keras.layers.normalization.BatchNormalization at 0x2ec6f865710>,
 'batch_normalization_40': <keras.layers.normalization.BatchNormalization at 0x2ec71db28d0>,
 'batch_normalization_41': <keras.layers.normalization.BatchNormalization at 0x2ec71e7b3c8>,
 'batch_normalization_42': <keras.layers.normalization.BatchNormalization at 0x2ec7201def0>,
 'batch_normalization_43': <keras.layers.normalization.BatchNormalization at 0x2ec72085c18>,
 'batch_normalization_44': <keras.layers.normalization.BatchNormalization at 0x2ec721c04a8>,
 'batch_normalization_45': <keras.layers.normalization.BatchNormalization at 0x2ec722b69b0>,
 'batch_normalization_46': <keras.layers.normalization.BatchNormalization at 0x2ec723f2f28>,
 'batch_normalization_47': <keras.layers.normalization.BatchNormalization at 0x2ec72490eb8>,
 'batch_normalization_48': <keras.layers.normalization.BatchNormalization at 0x2ec7266df60>,
 'batch_normalization_49': <keras.layers.normalization.BatchNormalization at 0x2ec726e9e10>,
 'batch_normalization_5': <keras.layers.normalization.BatchNormalization at 0x2ec6fa54f28>,
 'batch_normalization_50': <keras.layers.normalization.BatchNormalization at 0x2ec72810a20>,
 'batch_normalization_51': <keras.layers.normalization.BatchNormalization at 0x2ec728cb9e8>,
 'batch_normalization_52': <keras.layers.normalization.BatchNormalization at 0x2ec72a19710>,
 'batch_normalization_53': <keras.layers.normalization.BatchNormalization at 0x2ec72b03438>,
 'batch_normalization_54': <keras.layers.normalization.BatchNormalization at 0x2ec72bfc940>,
 'batch_normalization_55': <keras.layers.normalization.BatchNormalization at 0x2ec72cdae48>,
 'batch_normalization_56': <keras.layers.normalization.BatchNormalization at 0x2ec72dc1c50>,
 'batch_normalization_57': <keras.layers.normalization.BatchNormalization at 0x2ec72fb1ef0>,
 'batch_normalization_58': <keras.layers.normalization.BatchNormalization at 0x2ec730056d8>,
 'batch_normalization_59': <keras.layers.normalization.BatchNormalization at 0x2ec731135c0>,
 'batch_normalization_6': <keras.layers.normalization.BatchNormalization at 0x2ec6faa7ac8>,
 'batch_normalization_60': <keras.layers.normalization.BatchNormalization at 0x2ec731e2908>,
 'batch_normalization_61': <keras.layers.normalization.BatchNormalization at 0x2ec732ed0f0>,
 'batch_normalization_62': <keras.layers.normalization.BatchNormalization at 0x2ec734255c0>,
 'batch_normalization_63': <keras.layers.normalization.BatchNormalization at 0x2ec735005f8>,
 'batch_normalization_64': <keras.layers.normalization.BatchNormalization at 0x2ec7361dc88>,
 'batch_normalization_65': <keras.layers.normalization.BatchNormalization at 0x2ec73776908>,
 'batch_normalization_66': <keras.layers.normalization.BatchNormalization at 0x2ec738f3f98>,
 'batch_normalization_67': <keras.layers.normalization.BatchNormalization at 0x2ec73909b70>,
 'batch_normalization_68': <keras.layers.normalization.BatchNormalization at 0x2ec73a59550>,
 'batch_normalization_69': <keras.layers.normalization.BatchNormalization at 0x2ec73b217b8>,
 'batch_normalization_7': <keras.layers.normalization.BatchNormalization at 0x2ec6fbc58d0>,
 'batch_normalization_70': <keras.layers.normalization.BatchNormalization at 0x2ec73cdd390>,
 'batch_normalization_71': <keras.layers.normalization.BatchNormalization at 0x2ec73d5bf28>,
 'batch_normalization_72': <keras.layers.normalization.BatchNormalization at 0x2ec73edbfd0>,
 'batch_normalization_73': <keras.layers.normalization.BatchNormalization at 0x2ec73f64588>,
 'batch_normalization_74': <keras.layers.normalization.BatchNormalization at 0x2ec7409d470>,
 'batch_normalization_75': <keras.layers.normalization.BatchNormalization at 0x2ec74196978>,
 'batch_normalization_76': <keras.layers.normalization.BatchNormalization at 0x2ec742a3b70>,
 'batch_normalization_77': <keras.layers.normalization.BatchNormalization at 0x2ec74405c18>,
 'batch_normalization_78': <keras.layers.normalization.BatchNormalization at 0x2ec744b5898>,
 'batch_normalization_79': <keras.layers.normalization.BatchNormalization at 0x2ec74595c50>,
 'batch_normalization_8': <keras.layers.normalization.BatchNormalization at 0x2ec6fca2c88>,
 'batch_normalization_80': <keras.layers.normalization.BatchNormalization at 0x2ec7467b8d0>,
 'batch_normalization_81': <keras.layers.normalization.BatchNormalization at 0x2ec747dc320>,
 'batch_normalization_82': <keras.layers.normalization.BatchNormalization at 0x2ec74950ef0>,
 'batch_normalization_83': <keras.layers.normalization.BatchNormalization at 0x2ec749b2c18>,
 'batch_normalization_84': <keras.layers.normalization.BatchNormalization at 0x2ec74aee4a8>,
 'batch_normalization_85': <keras.layers.normalization.BatchNormalization at 0x2ec74ba65f8>,
 'batch_normalization_86': <keras.layers.normalization.BatchNormalization at 0x2ec74d5bc50>,
 'batch_normalization_87': <keras.layers.normalization.BatchNormalization at 0x2ec74dfc5f8>,
 'batch_normalization_88': <keras.layers.normalization.BatchNormalization at 0x2ec74ec8860>,
 'batch_normalization_89': <keras.layers.normalization.BatchNormalization at 0x2ec75002f60>,
 'batch_normalization_9': <keras.layers.normalization.BatchNormalization at 0x2ec6fd88908>,
 'batch_normalization_90': <keras.layers.normalization.BatchNormalization at 0x2ec750ceb38>,
 'batch_normalization_91': <keras.layers.normalization.BatchNormalization at 0x2ec7524b710>,
 'batch_normalization_92': <keras.layers.normalization.BatchNormalization at 0x2ec75335438>,
 'batch_normalization_93': <keras.layers.normalization.BatchNormalization at 0x2ec75433940>,
 'batch_normalization_94': <keras.layers.normalization.BatchNormalization at 0x2ec754e9eb8>,
 'concatenate_1': <keras.layers.merge.Concatenate at 0x2ec74bb6898>,
 'concatenate_2': <keras.layers.merge.Concatenate at 0x2ec7550c2b0>,
 'conv2d_1': <keras.layers.convolutional.Conv2D at 0x2ebc9e547b8>,
 'conv2d_10': <keras.layers.convolutional.Conv2D at 0x2ec6fea39e8>,
 'conv2d_11': <keras.layers.convolutional.Conv2D at 0x2ec6ffb2ef0>,
 'conv2d_12': <keras.layers.convolutional.Conv2D at 0x2ebc9ed2b38>,
 'conv2d_13': <keras.layers.convolutional.Conv2D at 0x2ec701a9ef0>,
 'conv2d_14': <keras.layers.convolutional.Conv2D at 0x2ec702b0f60>,
 'conv2d_15': <keras.layers.convolutional.Conv2D at 0x2ec7039c9b0>,
 'conv2d_16': <keras.layers.convolutional.Conv2D at 0x2ec704b4898>,
 'conv2d_17': <keras.layers.convolutional.Conv2D at 0x2ec705bfdd8>,
 'conv2d_18': <keras.layers.convolutional.Conv2D at 0x2ec706b9cf8>,
 'conv2d_19': <keras.layers.convolutional.Conv2D at 0x2ec707b3860>,
 'conv2d_2': <keras.layers.convolutional.Conv2D at 0x2ebc9e545f8>,
 'conv2d_20': <keras.layers.convolutional.Conv2D at 0x2ec708def28>,
 'conv2d_21': <keras.layers.convolutional.Conv2D at 0x2ec709d7ba8>,
 'conv2d_22': <keras.layers.convolutional.Conv2D at 0x2ec70ae59e8>,
 'conv2d_23': <keras.layers.convolutional.Conv2D at 0x2ec70bf3ef0>,
 'conv2d_24': <keras.layers.convolutional.Conv2D at 0x2ec70ce6cc0>,
 'conv2d_25': <keras.layers.convolutional.Conv2D at 0x2ec70df37f0>,
 'conv2d_26': <keras.layers.convolutional.Conv2D at 0x2ec70edaeb8>,
 'conv2d_27': <keras.layers.convolutional.Conv2D at 0x2ec71038748>,
 'conv2d_28': <keras.layers.convolutional.Conv2D at 0x2ec71110f28>,
 'conv2d_29': <keras.layers.convolutional.Conv2D at 0x2ec7121fa20>,
 'conv2d_3': <keras.layers.convolutional.Conv2D at 0x2ec6f75f940>,
 'conv2d_30': <keras.layers.convolutional.Conv2D at 0x2ec71316ac8>,
 'conv2d_31': <keras.layers.convolutional.Conv2D at 0x2ec71453978>,
 'conv2d_32': <keras.layers.convolutional.Conv2D at 0x2ec71509cf8>,
 'conv2d_33': <keras.layers.convolutional.Conv2D at 0x2ec716347f0>,
 'conv2d_34': <keras.layers.convolutional.Conv2D at 0x2ec71745c18>,
 'conv2d_35': <keras.layers.convolutional.Conv2D at 0x2ec7182da20>,
 'conv2d_36': <keras.layers.convolutional.Conv2D at 0x2ec71944940>,
 'conv2d_37': <keras.layers.convolutional.Conv2D at 0x2ec71a4feb8>,
 'conv2d_38': <keras.layers.convolutional.Conv2D at 0x2ec71b5e9b0>,
 'conv2d_39': <keras.layers.convolutional.Conv2D at 0x2ec71c53a90>,
 'conv2d_4': <keras.layers.convolutional.Conv2D at 0x2ec6f8940f0>,
 'conv2d_40': <keras.layers.convolutional.Conv2D at 0x2ec71d91908>,
 'conv2d_41': <keras.layers.convolutional.Conv2D at 0x2ec71f16080>,
 'conv2d_42': <keras.layers.convolutional.Conv2D at 0x2ec71f53940>,
 'conv2d_43': <keras.layers.convolutional.Conv2D at 0x2ec72085f28>,
 'conv2d_44': <keras.layers.convolutional.Conv2D at 0x2ec7216e978>,
 'conv2d_45': <keras.layers.convolutional.Conv2D at 0x2ec72286898>,
 'conv2d_46': <keras.layers.convolutional.Conv2D at 0x2ec72393da0>,
 'conv2d_47': <keras.layers.convolutional.Conv2D at 0x2ec72490a20>,
 'conv2d_48': <keras.layers.convolutional.Conv2D at 0x2ec725989e8>,
 'conv2d_49': <keras.layers.convolutional.Conv2D at 0x2ec726a4278>,
 'conv2d_5': <keras.layers.convolutional.Conv2D at 0x2ec6f981978>,
 'conv2d_50': <keras.layers.convolutional.Conv2D at 0x2ec727a1b70>,
 'conv2d_51': <keras.layers.convolutional.Conv2D at 0x2ec72918ac8>,
 'conv2d_52': <keras.layers.convolutional.Conv2D at 0x2ec729c94e0>,
 'conv2d_53': <keras.layers.convolutional.Conv2D at 0x2ec72ab0908>,
 'conv2d_54': <keras.layers.convolutional.Conv2D at 0x2ec72bcd828>,
 'conv2d_55': <keras.layers.convolutional.Conv2D at 0x2ec72cda2b0>,
 'conv2d_56': <keras.layers.convolutional.Conv2D at 0x2ec72dc1a58>,
 'conv2d_57': <keras.layers.convolutional.Conv2D at 0x2ec72edc978>,
 'conv2d_58': <keras.layers.convolutional.Conv2D at 0x2ec72febef0>,
 'conv2d_59': <keras.layers.convolutional.Conv2D at 0x2ec730f59e8>,
 'conv2d_6': <keras.layers.convolutional.Conv2D at 0x2ec6fb272e8>,
 'conv2d_60': <keras.layers.convolutional.Conv2D at 0x2ec731e2cc0>,
 'conv2d_61': <keras.layers.convolutional.Conv2D at 0x2ec73319e10>,
 'conv2d_62': <keras.layers.convolutional.Conv2D at 0x2ec73425f60>,
 'conv2d_63': <keras.layers.convolutional.Conv2D at 0x2ec7350e7b8>,
 'conv2d_64': <keras.layers.convolutional.Conv2D at 0x2ec7361dfd0>,
 'conv2d_65': <keras.layers.convolutional.Conv2D at 0x2ec737059e8>,
 'conv2d_66': <keras.layers.convolutional.Conv2D at 0x2ec73822908>,
 'conv2d_67': <keras.layers.convolutional.Conv2D at 0x2ec7392fe80>,
 'conv2d_68': <keras.layers.convolutional.Conv2D at 0x2ec73a3b978>,
 'conv2d_69': <keras.layers.convolutional.Conv2D at 0x2ec73b31a58>,
 'conv2d_7': <keras.layers.convolutional.Conv2D at 0x2ec6fbc5ef0>,
 'conv2d_70': <keras.layers.convolutional.Conv2D at 0x2ec73cc8f60>,
 'conv2d_71': <keras.layers.convolutional.Conv2D at 0x2ec73d7bf60>,
 'conv2d_72': <keras.layers.convolutional.Conv2D at 0x2ec73e34908>,
 'conv2d_73': <keras.layers.convolutional.Conv2D at 0x2ec73f64ef0>,
 'conv2d_74': <keras.layers.convolutional.Conv2D at 0x2ec7404b940>,
 'conv2d_75': <keras.layers.convolutional.Conv2D at 0x2ec74166860>,
 'conv2d_76': <keras.layers.convolutional.Conv2D at 0x2ec74272898>,
 'conv2d_77': <keras.layers.convolutional.Conv2D at 0x2ec74428860>,
 'conv2d_78': <keras.layers.convolutional.Conv2D at 0x2ec744b5eb8>,
 'conv2d_79': <keras.layers.convolutional.Conv2D at 0x2ec74595a90>,
 'conv2d_8': <keras.layers.convolutional.Conv2D at 0x2ec6fca2ac8>,
 'conv2d_80': <keras.layers.convolutional.Conv2D at 0x2ec7468bb70>,
 'conv2d_81': <keras.layers.convolutional.Conv2D at 0x2ec747999e8>,
 'conv2d_82': <keras.layers.convolutional.Conv2D at 0x2ec748a2748>,
 'conv2d_83': <keras.layers.convolutional.Conv2D at 0x2ec749b2f28>,
 'conv2d_84': <keras.layers.convolutional.Conv2D at 0x2ec74a9a978>,
 'conv2d_85': <keras.layers.convolutional.Conv2D at 0x2ec74ba64a8>,
 'conv2d_86': <keras.layers.convolutional.Conv2D at 0x2ec74d7e898>,
 'conv2d_87': <keras.layers.convolutional.Conv2D at 0x2ec74de2be0>,
 'conv2d_88': <keras.layers.convolutional.Conv2D at 0x2ec74ed6b00>,
 'conv2d_89': <keras.layers.convolutional.Conv2D at 0x2ec74fe4940>,
 'conv2d_9': <keras.layers.convolutional.Conv2D at 0x2ec6fd96b70>,
 'conv2d_90': <keras.layers.convolutional.Conv2D at 0x2ec750f3b70>,
 'conv2d_91': <keras.layers.convolutional.Conv2D at 0x2ec751fcfd0>,
 'conv2d_92': <keras.layers.convolutional.Conv2D at 0x2ec752e6908>,
 'conv2d_93': <keras.layers.convolutional.Conv2D at 0x2ec75403828>,
 'conv2d_94': <keras.layers.convolutional.Conv2D at 0x2ec7553ef98>,
 'input_1': <keras.engine.topology.InputLayer at 0x2ebc9e54470>,
 'max_pooling2d_1': <keras.layers.pooling.MaxPooling2D at 0x2ec6f876898>,
 'max_pooling2d_2': <keras.layers.pooling.MaxPooling2D at 0x2ec6fa8cf28>,
 'max_pooling2d_3': <keras.layers.pooling.MaxPooling2D at 0x2ec71423940>,
 'max_pooling2d_4': <keras.layers.pooling.MaxPooling2D at 0x2ec7436dcc0>,
 'mixed0': <keras.layers.merge.Concatenate at 0x2ec701a9f98>,
 'mixed1': <keras.layers.merge.Concatenate at 0x2ec7093eeb8>,
 'mixed10': <keras.layers.merge.Concatenate at 0x2ec755fa630>,
 'mixed2': <keras.layers.merge.Concatenate at 0x2ec71038d68>,
 'mixed3': <keras.layers.merge.Concatenate at 0x2ec71453f98>,
 'mixed4': <keras.layers.merge.Concatenate at 0x2ec71e9b9b0>,
 'mixed5': <keras.layers.merge.Concatenate at 0x2ec728bdb00>,
 'mixed6': <keras.layers.merge.Concatenate at 0x2ec7336cf98>,
 'mixed7': <keras.layers.merge.Concatenate at 0x2ec73d49f60>,
 'mixed8': <keras.layers.merge.Concatenate at 0x2ec743808d0>,
 'mixed9': <keras.layers.merge.Concatenate at 0x2ec74cc1f60>,
 'mixed9_0': <keras.layers.merge.Concatenate at 0x2ec747999b0>,
 'mixed9_1': <keras.layers.merge.Concatenate at 0x2ec750f3d30>}

In [8]:
layer_contributions.keys()


Out[8]:
dict_keys(['mixed2', 'mixed3', 'mixed5'])

In [9]:
for keras_layer in set(layer_dict.keys()).intersection(set(layer_contributions.keys())):
    print(keras_layer + ': ' + str(layer_dict[keras_layer]))
    layer = model.get_layer(name = keras_layer)
    print('\t' + str(layer.output_shape))


mixed3: <keras.layers.merge.Concatenate object at 0x000002EC71453F98>
	(None, None, None, 768)
mixed5: <keras.layers.merge.Concatenate object at 0x000002EC728BDB00>
	(None, None, None, 768)
mixed2: <keras.layers.merge.Concatenate object at 0x000002EC71038D68>
	(None, None, None, 288)

In [10]:
# Define the loss as a scalar Keras variable, initialized to zero.
loss = K.variable(0.)

In [11]:
for layer_name in layer_contributions:
    print('Layer: ' + str(layer_name))
    coeff = layer_contributions[layer_name]
    print('\tCoeff: ' + str(coeff))
    activation = layer_dict[layer_name].output
    print('\tActivation: ' + str(activation))
    # Scaling factor: the total number of elements in the activation tensor
    scaling = K.prod(K.cast(K.shape(activation), 'float32'))
    print('\tScaling: ' + str(scaling))
    # Add the scaled L2 norm of the layer's activation to the loss;
    # border pixels are excluded to avoid edge artifacts
    layer_loss = coeff * K.sum(K.square(activation[:, 2: -2, 2: -2, :])) / scaling
    print('\tLayer loss: ' + str(layer_loss))
    loss += layer_loss


Layer: mixed2
	Coeff: 0.2
	Activation: Tensor("mixed2/concat:0", shape=(?, ?, ?, 288), dtype=float32)
	Scaling: Tensor("Prod:0", shape=(), dtype=float32)
	Layer loss: Tensor("truediv:0", shape=(), dtype=float32)
Layer: mixed3
	Coeff: 3.0
	Activation: Tensor("mixed3/concat:0", shape=(?, ?, ?, 768), dtype=float32)
	Scaling: Tensor("Prod_1:0", shape=(), dtype=float32)
	Layer loss: Tensor("truediv_1:0", shape=(), dtype=float32)
Layer: mixed5
	Coeff: 1.5
	Activation: Tensor("mixed5/concat:0", shape=(?, ?, ?, 768), dtype=float32)
	Scaling: Tensor("Prod_2:0", shape=(), dtype=float32)
	Layer loss: Tensor("truediv_2:0", shape=(), dtype=float32)

In [12]:
loss


Out[12]:
<tf.Tensor 'add_2:0' shape=() dtype=float32>
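
To make the scaling term in the loss concrete, here is a small NumPy sketch (using a random, hypothetical activation tensor rather than a real layer output): dividing the border-cropped sum of squares by the product of the shape turns the layer loss into roughly a mean of squared activations.

import numpy as np
act = np.random.rand(1, 8, 8, 4).astype('float32')             # hypothetical activation tensor
scaling = np.prod(act.shape).astype('float32')                  # plays the role of K.prod(K.shape(...))
layer_loss = 0.2 * np.sum(np.square(act[:, 2: -2, 2: -2, :])) / scaling
print(layer_loss)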

Gradient ascent for picture generation


In [13]:
# Tensor of shape (?, ?, ?, 3) to hold the generated image
dream = model.input

In [14]:
# Compute the gradients of the loss with regard to the dream image
grads = K.gradients(loss, dream)[0]

In [15]:
# Normalize the gradients by their mean absolute value (the 1e-7 floor avoids division by zero)
grads /= K.maximum(K.mean(K.abs(grads)), 1e-7)

In [16]:
# Set up a Keras function to retrieve the value of the loss
# and of the gradients, given an input image.
outputs = [loss, grads]
fetch_loss_and_grads = K.function([dream], outputs)

def eval_loss_and_grads(x):
    # Run the fetch function and unpack the loss value and gradient array
    outs = fetch_loss_and_grads([x])
    loss_value = outs[0]
    grad_values = outs[1]
    return loss_value, grad_values

def gradient_ascent(x, iterations, step, max_loss=None):
    # Repeatedly nudge the image in the direction that increases the loss,
    # stopping early if the loss exceeds max_loss
    for i in range(iterations):
        loss_value, grad_values = eval_loss_and_grads(x)
        if max_loss is not None and loss_value > max_loss:
            break
        print('...Loss value at', i, ':', loss_value)
        x += step * grad_values
    return x
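
Before touching a real photo, the plumbing above can be smoke-tested on random noise (a hedged sketch; the 299x299 size is just one valid input size for the fully convolutional InceptionV3, not a requirement of this notebook).

import numpy as np
noise = np.random.uniform(-1., 1., size=(1, 299, 299, 3)).astype('float32')
noise = gradient_ascent(noise, iterations=2, step=0.01, max_loss=10.)   # typically prints one loss value per step
print(noise.shape)   # the shape is unchanged; only pixel values are nudged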

Image preprocessing


In [17]:
import scipy.ndimage
import numpy as np
from keras.preprocessing import image

In [18]:
IMAGE_PATH = './data/Chapter 8.2 - Deep Dream/image.jpg'

In [19]:
def load_image_to_array(image_path):
    # Load an image from disk and convert it to a float32 Numpy array
    img = image.load_img(image_path)
    img = image.img_to_array(img)
    return img

In [20]:
img = load_image_to_array(image_path = IMAGE_PATH)

In [21]:
img.shape


Out[21]:
(1260, 2048, 3)

In [22]:
img = np.expand_dims(img, axis = 0)

In [23]:
img.shape


Out[23]:
(1, 1260, 2048, 3)

In [24]:
from keras.applications.inception_v3 import preprocess_input as inceptionV3_preprocess_input

In [25]:
def preprocess_image(image_path):
    # Util function to open a picture, add a batch dimension
    # and apply Inception V3 preprocessing
    img = load_image_to_array(image_path = image_path)
    img = np.expand_dims(img, axis = 0)
    img = inceptionV3_preprocess_input(img)
    return img

In [26]:
img = preprocess_image(image_path = IMAGE_PATH)

In [27]:
img.shape


Out[27]:
(1, 1260, 2048, 3)

In [28]:
# Playing with these hyperparameters will also allow you to achieve new effects

# Gradient ascent step size
STEP = 0.01 

# Number of scales at which to run gradient ascent
num_octave = 3  

# Size ratio between scales
octave_scale = 1.4  

# Number of ascent steps per scale
ITERATIONS = 20

In [29]:
# If the loss grows larger than 10,
# we interrupt the gradient-ascent process to avoid ugly artifacts
MAX_LOSS = 10.

In [30]:
# Load the image into a Numpy array
img = preprocess_image(image_path = IMAGE_PATH)

In [31]:
img.shape


Out[31]:
(1, 1260, 2048, 3)

In [32]:
# We prepare a list of shape tuples
# defining the different scales at which we will run gradient ascent
original_shape = img.shape[1:3]
original_shape


Out[32]:
(1260, 2048)

In [33]:
successive_shapes = [original_shape]
successive_shapes


Out[33]:
[(1260, 2048)]

In [34]:
for i in range(1, num_octave):
    shape = tuple([int(dim / (octave_scale ** i)) for dim in original_shape])
    successive_shapes.append(shape)

In [35]:
successive_shapes


Out[35]:
[(1260, 2048), (900, 1462), (642, 1044)]

In [36]:
# Reverse the list of shapes so they are in increasing order (smallest scale first)
successive_shapes = successive_shapes[::-1]

In [37]:
successive_shapes


Out[37]:
[(642, 1044), (900, 1462), (1260, 2048)]

In [38]:
def resize_img(img, size):
    # Resize a batched image by zooming only the spatial axes;
    # the batch and channel axes keep a zoom factor of 1
    img = np.copy(img)
    factors = (1,
               float(size[0]) / img.shape[1],
               float(size[1]) / img.shape[2],
               1)
    return scipy.ndimage.zoom(img, factors, order=1)
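
A quick check of resize_img on a dummy batch (made-up dimensions, purely illustrative): only the spatial axes are rescaled, while the batch and channel axes are preserved.

dummy = np.zeros((1, 10, 20, 3), dtype='float32')
print(resize_img(dummy, size=(5, 10)).shape)   # expected: (1, 5, 10, 3)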

In [39]:
def deprocess_image(x):
    # Util function to convert a tensor into a valid image.
    if K.image_data_format() == 'channels_first':
        x = x.reshape((3, x.shape[2], x.shape[3]))
        x = x.transpose((1, 2, 0))
    else:
        x = x.reshape((x.shape[1], x.shape[2], 3))
    # Undo Inception V3 preprocessing: map from [-1, 1] back to [0, 255]
    x /= 2.
    x += 0.5
    x *= 255.
    x = np.clip(x, 0, 255).astype('uint8')
    return x
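
deprocess_image undoes the Inception V3 preprocessing, which maps pixels from [0, 255] to [-1, 1]. A hedged round-trip sketch on random pixels (assuming the default 'tf'-style preprocessing of keras.applications.inception_v3):

sample = np.random.randint(0, 256, size=(1, 4, 4, 3)).astype('float32')   # hypothetical image
roundtrip = deprocess_image(inceptionV3_preprocess_input(np.copy(sample)))
print(np.abs(roundtrip.astype('float32') - sample).max())   # at most ~1, from uint8 truncation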

In [40]:
import imageio

In [41]:
def save_img(img, fname):
    # Deprocess a copy of the image and write it to disk
    deprocessed_img = deprocess_image(np.copy(img))
    imageio.imwrite(fname, deprocessed_img)

In [42]:
# Resize the Numpy array of the image to our smallest scale
original_img = np.copy(img)
shrunk_original_img = resize_img(img, successive_shapes[0])

In [43]:
for shape in successive_shapes:
    print('Processing image shape', shape)
    # Scale the dream image up to the current octave
    img = resize_img(img, shape)
    # Run gradient ascent, altering the dream
    img = gradient_ascent(x = img,
                          iterations = ITERATIONS,
                          step = STEP,
                          max_loss = MAX_LOSS)
    # Scale up the smaller version of the original image: it will be pixellated
    upscaled_shrunk_original_img = resize_img(shrunk_original_img, shape)
    # The high-quality version of the original image at this size
    same_size_original = resize_img(original_img, shape)
    # The difference between the two is the detail lost when scaling up
    lost_detail = same_size_original - upscaled_shrunk_original_img

    # Reinject the lost detail into the dream and save the intermediate result
    img += lost_detail
    shrunk_original_img = resize_img(original_img, shape)
    save_img(img, fname='./data/Chapter 8.2 - Deep Dream/dream_at_scale_' + str(shape) + '.png')

save_img(img, fname = './data/Chapter 8.2 - Deep Dream/final_dream.png')


Processing image shape (642, 1044)
...Loss value at 0 : 1.1246423
...Loss value at 1 : 1.4731867
...Loss value at 2 : 1.9847761
...Loss value at 3 : 2.638072
...Loss value at 4 : 3.34002
...Loss value at 5 : 4.025421
...Loss value at 6 : 4.667389
...Loss value at 7 : 5.311434
...Loss value at 8 : 5.906174
...Loss value at 9 : 6.490458
...Loss value at 10 : 7.044338
...Loss value at 11 : 7.5871515
...Loss value at 12 : 8.095409
...Loss value at 13 : 8.596305
...Loss value at 14 : 9.070893
...Loss value at 15 : 9.534389
...Loss value at 16 : 9.971168
Processing image shape (900, 1462)
...Loss value at 0 : 3.2439234
...Loss value at 1 : 4.486898
...Loss value at 2 : 5.4103613
...Loss value at 3 : 6.1980314
...Loss value at 4 : 6.914725
...Loss value at 5 : 7.5497103
...Loss value at 6 : 8.155141
...Loss value at 7 : 8.707602
...Loss value at 8 : 9.240088
...Loss value at 9 : 9.743059
Processing image shape (1260, 2048)
...Loss value at 0 : 3.2165728
...Loss value at 1 : 4.3598604
...Loss value at 2 : 5.243634
...Loss value at 3 : 6.015962
...Loss value at 4 : 6.7226954
...Loss value at 5 : 7.3770475
...Loss value at 6 : 7.9857926
...Loss value at 7 : 8.560323
...Loss value at 8 : 9.09658
...Loss value at 9 : 9.603898